INSN(movdqu8, f2, 0f, 7f, vl, b, vl),
INSN(movdqu16, f2, 0f, 6f, vl, w, vl),
INSN(movdqu16, f2, 0f, 7f, vl, w, vl),
+ INSN(packssdw, 66, 0f, 6b, vl, d_nb, vl),
+ INSN(packsswb, 66, 0f, 63, vl, w, vl),
+ INSN(packusdw, 66, 0f38, 2b, vl, d_nb, vl),
+ INSN(packuswb, 66, 0f, 67, vl, w, vl),
INSN(paddb, 66, 0f, fc, vl, b, vl),
INSN(paddsb, 66, 0f, ec, vl, b, vl),
INSN(paddsw, 66, 0f, ed, vl, w, vl),
[0x25] = { .simd_size = simd_other, .two_op = 1, .d8s = d8s_vl_by_2 },
[0x26 ... 0x29] = { .simd_size = simd_packed_int, .d8s = d8s_vl },
[0x2a] = { .simd_size = simd_packed_int, .two_op = 1, .d8s = d8s_vl },
- [0x2b] = { .simd_size = simd_packed_int },
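+ /* 0x2b (vpackusdw): the EVEX form needs Disp8 scaling by the full vector width. */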
+ [0x2b] = { .simd_size = simd_packed_int, .d8s = d8s_vl },
[0x2c ... 0x2d] = { .simd_size = simd_packed_fp },
[0x2e ... 0x2f] = { .simd_size = simd_packed_fp, .to_mem = 1 },
[0x30] = { .simd_size = simd_other, .two_op = 1, .d8s = d8s_vl_by_2 },
case X86EMUL_OPC_EVEX_66(0x0f, 0x69): /* vpunpckhwd [xyz]mm/mem,[xyz]mm,[xyz]mm{k} */
op_bytes = 16 << evex.lr;
/* fall through */
+ case X86EMUL_OPC_EVEX_66(0x0f, 0x63): /* vpacksswb [xyz]mm/mem,[xyz]mm,[xyz]mm{k} */
+ case X86EMUL_OPC_EVEX_66(0x0f, 0x67): /* vpackuswb [xyz]mm/mem,[xyz]mm,[xyz]mm{k} */
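+ /*
+ * The pack cases sit past the op_bytes adjustment above, which only the
+ * unpack forms need; the pack insns take full-width memory operands.
+ */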
case X86EMUL_OPC_EVEX_66(0x0f, 0xd1): /* vpsrlw xmm/m128,[xyz]mm,[xyz]mm{k} */
case X86EMUL_OPC_EVEX_66(0x0f, 0xe1): /* vpsraw xmm/m128,[xyz]mm,[xyz]mm{k} */
case X86EMUL_OPC_EVEX_66(0x0f, 0xf1): /* vpsllw xmm/m128,[xyz]mm,[xyz]mm{k} */
avx512_vlen_check(false);
goto simd_zmm;
+ case X86EMUL_OPC_EVEX_66(0x0f, 0x6b): /* vpackssdw [xyz]mm/mem,[xyz]mm,[xyz]mm{k} */
+ case X86EMUL_OPC_EVEX_66(0x0f38, 0x2b): /* vpackusdw [xyz]mm/mem,[xyz]mm,[xyz]mm{k} */
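+ /* Both are W0-only encodings, so EVEX.W set yields #UD; broadcast (EVEX.b with a memory operand) is a legitimate encoding here. */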
+ generate_exception_if(evex.w, EXC_UD);
+ host_and_vcpu_must_have(avx512bw);
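+ /*
+ * The opmask applies at destination (word) granularity and doesn't map
+ * linearly onto the dword source elements, so don't attempt mask-based
+ * fault suppression.
+ */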
+ fault_suppression = false;
+ goto avx512f_no_sae;
+
case X86EMUL_OPC_EVEX_66(0x0f, 0x6c): /* vpunpcklqdq [xyz]mm/mem,[xyz]mm,[xyz]mm{k} */
case X86EMUL_OPC_EVEX_66(0x0f, 0x6d): /* vpunpckhqdq [xyz]mm/mem,[xyz]mm,[xyz]mm{k} */
fault_suppression = false;